Merge remote-tracking branch 'origin/master' into scenarios

Andrew Cantino, 10 years ago
Parent
Current commit
f3ffb33a7d

+ 2 - 0
Gemfile

@@ -74,6 +74,8 @@ gem 'slack-notifier', '~> 0.5.0'
 
 gem 'therubyracer', '~> 0.12.1'
 
+gem 'mqtt'
+
 group :development do
   gem 'binding_of_caller'
   gem 'better_errors'

+ 2 - 0
Gemfile.lock

@@ -160,6 +160,7 @@ GEM
     mime-types (1.25.1)
     mini_portile (0.5.3)
     minitest (5.3.3)
+    mqtt (0.2.0)
     multi_json (1.9.3)
     multi_xml (0.5.5)
     multipart-post (2.0.0)
@@ -341,6 +342,7 @@ DEPENDENCIES
   kaminari (~> 0.15.1)
   kramdown (~> 1.3.3)
   liquid (~> 2.6.1)
+  mqtt
   mysql2 (~> 0.3.15)
   nokogiri (~> 1.6.1)
   protected_attributes (~> 1.0.7)

+ 9 - 7
Procfile

@@ -1,11 +1,13 @@
-# Procfile for development:
+# Procfile for development using the new threaded worker (scheduler, twitter stream and delayed job)
 web: bundle exec rails server
-schedule: bundle exec rails runner bin/schedule.rb
-twitter: bundle exec rails runner bin/twitter_stream.rb
-dj: bundle exec script/delayed_job run
+jobs: bundle exec rails runner bin/threaded.rb
 
 # Possible Procfile configuration for production:
 # web: bundle exec unicorn -c config/unicorn/production.rb
-# schedule: bundle exec rails runner bin/schedule.rb
-# twitter: bundle exec rails runner bin/twitter_stream.rb
-# dj: bundle exec script/delayed_job run
+# jobs: bundle exec rails runner bin/threaded.rb
+
+# Old version with separate processes (use this if you have issues with the threaded version)
+#web: bundle exec rails server
+#schedule: bundle exec rails runner bin/schedule.rb
+#twitter: bundle exec rails runner bin/twitter_stream.rb
+#dj: bundle exec script/delayed_job run

+ 2 - 2
app/models/agents/digest_email_agent.rb

@@ -1,5 +1,5 @@
 module Agents
-  class DigestEmailAgent < Agent
+  class EmailDigestAgent < Agent
     include EmailConcern
 
     default_schedule "5am"
@@ -7,7 +7,7 @@ module Agents
     cannot_create_events!
 
     description <<-MD
-      The DigestEmailAgent collects any Events sent to it and sends them all via email when run.
+      The EmailDigestAgent collects any Events sent to it and sends them all via email when run.
       The email will be sent to your account's address and will have a `subject` and an optional `headline` before
      listing the Events.  If the Events' payloads contain a `message`, that will be highlighted, otherwise everything in
      their payloads will be shown.

+ 138 - 0
app/models/agents/mqtt_agent.rb

@@ -0,0 +1,138 @@
+# encoding: utf-8
+require "mqtt"
+require "json"
+
+module Agents
+  class MqttAgent < Agent
+    description <<-MD
+      The MQTT agent allows both publication and subscription to an MQTT topic.
+
+      MQTT is a generic transport protocol for machine-to-machine communication.
+
+      You can do things like:
+
+       * Publish to [RabbitMQ](http://www.rabbitmq.com/mqtt.html)
+       * Run [OwnTracks, a location tracking tool](http://owntracks.org/) for iOS and Android
+       * Subscribe to your home automation setup like [Ninjablocks](http://forums.ninjablocks.com/index.php?p=/discussion/661/today-i-learned-about-mqtt/p1) or [TheThingSystem](http://thethingsystem.com/dev/supported-things.html)
+
+      Simply choose a topic (think email subject line) to publish/listen to, and configure your service.
+
+      It's easy to set up your own [broker](http://jpmens.net/2013/09/01/installing-mosquitto-on-a-raspberry-pi/) or connect to a [cloud service](http://www.cloudmqtt.com).
+
+      Hints:
+      Many services run mqtts (MQTT over SSL), often with a custom certificate.
+
+      You'll want to download their cert and install it locally, specifying the ```ca_file```, ```cert_file``` and ```key_file``` options.
+
+      Example configuration:
+
+      <pre><code>{
+        'uri' => 'mqtts://user:pass@localhost:8883',
+        'ssl' => :TLSv1,
+        'ca_file' => './ca.pem',
+        'cert_file' => './client.crt',
+        'key_file' => './client.key',
+        'topic' => 'huginn'
+      }
+      </code></pre>
+
+      Subscribe to CloCkWeRX's TheThingSystem instance (thethingsystem.com), where
+      temperature and other events are being published.
+
+      <pre><code>{
+        'uri' => 'mqtt://kcqlmkgx:sVNoccqwvXxE@m10.cloudmqtt.com:13858',
+        'topic' => 'the_thing_system/demo'
+      }
+      </code></pre>
+
+      Subscribe to all topics
+      <pre><code>{
+        'uri' => 'mqtt://kcqlmkgx:sVNoccqwvXxE@m10.cloudmqtt.com:13858',
+        'topic' => '/#'
+      }
+      </code></pre>
+
+      Find out more detail on [subscription wildcards](http://www.eclipse.org/paho/files/mqttdoc/Cclient/wildcard.html)
+    MD
+
+    event_description <<-MD
+      Events are simply nested MQTT payloads. For example, an MQTT payload for OwnTracks
+
+      <pre><code>{
+        "topic": "owntracks/kcqlmkgx/Dan",
+        "message": {"_type": "location", "lat": "-34.8493644", "lon": "138.5218119", "tst": "1401771049", "acc": "50.0", "batt": "31", "desc": "Home", "event": "enter"},
+        "time": 1401771051
+      }</code></pre>
+    MD
+
+    def validate_options
+      unless options['uri'].present? &&
+        options['topic'].present?
+        errors.add(:base, "topic and uri are required")
+      end
+    end
+
+    def working?
+      event_created_within?(options['expected_update_period_in_days']) && !recent_error_logs?
+    end
+
+    def default_options
+      {
+        'uri' => 'mqtts://user:pass@localhost:8883',
+        'ssl' => :TLSv1,
+        'ca_file'  => './ca.pem',
+        'cert_file' => './client.crt',
+        'key_file' => './client.key',
+        'topic' => 'huginn',
+        'max_read_time' => '10'
+      }
+    end
+
+    def mqtt_client
+      @client ||= MQTT::Client.new(options['uri'])
+
+      if options['ssl']
+        @client.ssl = options['ssl'].to_sym
+        @client.ca_file = options['ca_file']
+        @client.cert_file = options['cert_file']
+        @client.key_file = options['key_file']
+      end
+
+      @client
+    end
+
+    def receive(incoming_events)
+      mqtt_client.connect do |c|
+        incoming_events.each do |event|
+          c.publish(options['topic'], event.payload)
+        end
+
+        c.disconnect
+      end
+    end
+
+    def check
+      mqtt_client.connect do |c|
+        Timeout::timeout((options['max_read_time'].presence || 15).to_i) {
+          c.get(options['topic']) do |topic, message|
+            # A lot of services generate JSON. Try that first.
+            payload = JSON.parse(message) rescue message
+
+            create_event :payload => {
+              'topic' => topic,
+              'message' => payload,
+              'time' => Time.now.to_i
+            }
+          end
+        } rescue TimeoutError
+
+        c.disconnect
+      end
+    end
+  end
+end

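For reference, the new agent is a thin wrapper around the ruby-mqtt gem added to the Gemfile above. A minimal sketch of the underlying client calls it builds on looks like this; the broker URI and topic are placeholders for the example, not values the agent ships with:

require 'mqtt'

# Publish one message, then read messages back from the same topic.
# 'mqtt://localhost:1883' and 'huginn' are illustrative; the agent takes
# these from its 'uri' and 'topic' options.
MQTT::Client.connect('mqtt://localhost:1883') do |client|
  client.publish('huginn', '{"greeting": "hello"}')

  client.get('huginn') do |topic, message|
    puts "#{topic}: #{message}"
    break # get blocks and yields messages forever; stop after the first one
  end
end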
+ 6 - 3
app/models/agents/weather_agent.rb

@@ -19,6 +19,8 @@ module Agents
       You must set up an [API key for Wunderground](http://www.wunderground.com/weather/api/) in order to use this Agent with Wunderground.
 
       You must set up an [API key for Forecast](https://developer.forecast.io/) in order to use this Agent with ForecastIO.
+
+      Set `expected_update_period_in_days` to the maximum amount of time that you'd expect to pass between Events being created by this Agent.
     MD
 
     event_description <<-MD
@@ -49,7 +51,7 @@ module Agents
     default_schedule "8pm"
 
     def working?
-      event_created_within?(2) && !recent_error_logs?
+      event_created_within?((options['expected_update_period_in_days'].presence || 2).to_i) && !recent_error_logs?
     end
 
     def key_setup?
@@ -61,7 +63,8 @@ module Agents
         'service' => 'wunderground',
         'api_key' => 'your-key',
         'location' => '94103',
-        'which_day' => '1'
+        'which_day' => '1',
+        'expected_update_period_in_days' => '2'
       }
     end
 
@@ -163,7 +166,7 @@ module Agents
              'ozone' => value.ozone.to_s
            }
            return day
-          end    
+          end
        end
      end
    end

+ 0 - 82
bin/schedule.rb

@@ -11,87 +11,5 @@ unless defined?(Rails)
   exit 1
 end
 
-require 'rufus/scheduler'
-
-class HuginnScheduler
-  attr_accessor :mutex
-
-  def run_schedule(time)
-    with_mutex do
-      puts "Queuing schedule for #{time}"
-      Agent.delay.run_schedule(time)
-    end
-  end
-
-  def propagate!
-    with_mutex do
-      puts "Queuing event propagation"
-      Agent.delay.receive!
-    end
-  end
-
-  def cleanup_expired_events!
-    with_mutex do
-      puts "Running event cleanup"
-      Event.delay.cleanup_expired!
-    end
-  end
-
-  def with_mutex
-    ActiveRecord::Base.connection_pool.with_connection do
-      mutex.synchronize do
-        yield
-      end
-    end
-  end
-
-  def run!
-    self.mutex = Mutex.new
-
-    rufus_scheduler = Rufus::Scheduler.new
-
-    tzinfo_friendly_timezone = ActiveSupport::TimeZone::MAPPING[ENV['TIMEZONE'].present? ? ENV['TIMEZONE'] : "Pacific Time (US & Canada)"]
-
-    # Schedule event propagation.
-
-    rufus_scheduler.every '1m' do
-      propagate!
-    end
-
-    # Schedule event cleanup.
-
-    rufus_scheduler.cron "0 0 * * * " + tzinfo_friendly_timezone do
-      cleanup_expired_events!
-    end
-
-    # Schedule repeating events.
-
-    %w[1m 2m 5m 10m 30m 1h 2h 5h 12h 1d 2d 7d].each do |schedule|
-      rufus_scheduler.every schedule do
-        run_schedule "every_#{schedule}"
-      end
-    end
-
-    # Schedule events for specific times.
-
-    # Times are assumed to be in PST for now.  Can store a user#timezone later.
-    24.times do |hour|
-      rufus_scheduler.cron "0 #{hour} * * * " + tzinfo_friendly_timezone do
-        if hour == 0
-          run_schedule "midnight"
-        elsif hour < 12
-          run_schedule "#{hour}am"
-        elsif hour == 12
-          run_schedule "noon"
-        else
-          run_schedule "#{hour - 12}pm"
-        end
-      end
-    end
-
-    rufus_scheduler.join
-  end
-end
-
 scheduler = HuginnScheduler.new
 scheduler.run!

+ 57 - 0
bin/threaded.rb

@@ -0,0 +1,57 @@
+require 'thread'
+
+def stop
+  puts 'Exiting...'
+  @scheduler.stop
+  @dj.stop
+  @stream.stop
+end
+
+def safely
+  begin
+    yield
+  rescue StandardError => e
+    STDERR.puts "\nException #{e.message}:\n#{e.backtrace.join("\n")}\n\n"
+    STDERR.puts "Terminating myself ..."
+    stop
+  end
+end
+
+threads = []
+threads << Thread.new do
+  safely do
+    @stream = TwitterStream.new
+    @stream.run
+    puts "Twitter stream stopped ..."
+  end
+end
+
+threads << Thread.new do
+  safely do
+    @scheduler = HuginnScheduler.new
+    @scheduler.run!
+    puts "Scheduler stopped ..."
+  end
+end
+
+threads << Thread.new do
+  safely do
+    require 'delayed/command'
+    @dj = Delayed::Worker.new
+    @dj.start
+    puts "Delayed job stopped ..."
+  end
+end
+
+# We need to wait a bit to let delayed_job set its traps so we can override them
+sleep 0.5
+
+trap('TERM') do
+  stop
+end
+
+trap('INT') do
+  stop
+end
+
+threads.collect { |t| t.join }

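The script above only makes sense inside rails runner, where HuginnScheduler, TwitterStream and Delayed::Worker are loaded. As a self-contained sketch of the pattern it applies (one thread per worker, an exception in any worker stops all of them, TERM/INT traps trigger the same shutdown), with a hypothetical DummyWorker standing in for the real workers:

require 'thread'

# Hypothetical stand-in for HuginnScheduler / TwitterStream / Delayed::Worker:
# anything with a blocking #run and a #stop that makes #run return.
class DummyWorker
  def run
    sleep 0.1 until @stopped
  end

  def stop
    @stopped = true
  end
end

workers = [DummyWorker.new, DummyWorker.new]

stop_all = lambda do
  puts 'Exiting...'
  workers.each(&:stop)
end

threads = workers.map do |worker|
  Thread.new do
    begin
      worker.run                    # blocks until the worker is stopped
    rescue StandardError => e
      STDERR.puts "Exception #{e.message}; terminating"
      stop_all.call                 # one failing worker stops the whole process
    end
  end
end

# Stop cleanly on Ctrl-C or TERM, mirroring the traps in bin/threaded.rb.
trap('TERM') { stop_all.call }
trap('INT')  { stop_all.call }

threads.each(&:join)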
+ 1 - 112
bin/twitter_stream.rb

@@ -12,115 +12,4 @@ unless defined?(Rails)
   exit 1
 end
 
-require 'cgi'
-require 'json'
-require 'twitter/json_stream'
-require 'em-http-request'
-require 'pp'
-
-def stream!(filters, agent, &block)
-  stream = Twitter::JSONStream.connect(
-    :path    => "/1/statuses/#{(filters && filters.length > 0) ? 'filter' : 'sample'}.json#{"?track=#{filters.map {|f| CGI::escape(f) }.join(",")}" if filters && filters.length > 0}",
-    :ssl     => true,
-    :oauth   => {
-      :consumer_key    => agent.twitter_consumer_key,
-      :consumer_secret => agent.twitter_consumer_secret,
-      :access_key      => agent.twitter_oauth_token,
-      :access_secret   => agent.twitter_oauth_token_secret
-    }
-  )
-
-  stream.each_item do |status|
-    status = JSON.parse(status) if status.is_a?(String)
-    next unless status
-    next if status.has_key?('delete')
-    next unless status['text']
-    status['text'] = status['text'].gsub(/&lt;/, "<").gsub(/&gt;/, ">").gsub(/[\t\n\r]/, '  ')
-    block.call(status)
-  end
-
-  stream.on_error do |message|
-    STDERR.puts " --> Twitter error: #{message} <--"
-  end
-
-  stream.on_no_data do |message|
-    STDERR.puts " --> Got no data for awhile; trying to reconnect."
-    EventMachine::stop_event_loop
-  end
-
-  stream.on_max_reconnects do |timeout, retries|
-    STDERR.puts " --> Oops, tried too many times! <--"
-    EventMachine::stop_event_loop
-  end
-end
-
-def load_and_run(agents)
-  agents.group_by { |agent| agent.twitter_oauth_token }.each do |oauth_token, agents|
-    filter_to_agent_map = agents.map { |agent| agent.options[:filters] }.flatten.uniq.compact.map(&:strip).inject({}) { |m, f| m[f] = []; m }
-
-    agents.each do |agent|
-      agent.options[:filters].flatten.uniq.compact.map(&:strip).each do |filter|
-        filter_to_agent_map[filter] << agent
-      end
-    end
-
-    recent_tweets = []
-
-    stream!(filter_to_agent_map.keys, agents.first) do |status|
-      if status["retweeted_status"].present? && status["retweeted_status"].is_a?(Hash)
-        puts "Skipping retweet: #{status["text"]}"
-      elsif recent_tweets.include?(status["id_str"])
-        puts "Skipping duplicate tweet: #{status["text"]}"
-      else
-        recent_tweets << status["id_str"]
-        recent_tweets.shift if recent_tweets.length > DUPLICATE_DETECTION_LENGTH
-        puts status["text"]
-        filter_to_agent_map.keys.each do |filter|
-          if (filter.downcase.split(SEPARATOR) - status["text"].downcase.split(SEPARATOR)).reject(&:empty?) == [] # Hacky McHackerson
-            filter_to_agent_map[filter].each do |agent|
-              puts " -> #{agent.name}"
-              agent.process_tweet(filter, status)
-            end
-          end
-        end
-      end
-    end
-  end
-end
-
-RELOAD_TIMEOUT = 10.minutes
-DUPLICATE_DETECTION_LENGTH = 1000
-SEPARATOR = /[^\w_\-]+/
-
-while true
-  begin
-    agents = Agents::TwitterStreamAgent.all
-
-    EventMachine::run do
-      EventMachine.add_periodic_timer(RELOAD_TIMEOUT) {
-        puts "Reloading EventMachine and all Agents..."
-        EventMachine::stop_event_loop
-      }
-
-      if agents.length == 0
-        puts "No agents found.  Will look again in a minute."
-        sleep 60
-        EventMachine::stop_event_loop
-      else
-        puts "Found #{agents.length} agent(s).  Loading them now..."
-        load_and_run agents
-      end
-    end
-
-    print "Pausing..."; STDOUT.flush
-    sleep 1
-    puts "done."
-  rescue SignalException, SystemExit
-    EventMachine::stop_event_loop if EventMachine.reactor_running?
-    exit
-  rescue StandardError => e
-    STDERR.puts "\nException #{e.message}:\n#{e.backtrace.join("\n")}\n\n"
-    STDERR.puts "Waiting for a couple of minutes..."
-    sleep 120
-  end
-end
+TwitterStream.new.run

+ 1 - 0
config/initializers/delayed_job.rb

@@ -1,6 +1,7 @@
 Delayed::Worker.destroy_failed_jobs = true
 Delayed::Worker.max_attempts = 5
 Delayed::Worker.max_run_time = 20.minutes
+Delayed::Worker.read_ahead = 5
 Delayed::Worker.default_priority = 10
 Delayed::Worker.delay_jobs = !Rails.env.test?
 

+ 21 - 0
db/migrate/20140603104211_rename_digest_email_to_email_digest.rb

@@ -0,0 +1,21 @@
+class RenameDigestEmailToEmailDigest < ActiveRecord::Migration
+  def up
+    sql = <<-SQL
+      UPDATE #{ActiveRecord::Base.connection.quote_table_name('agents')}
+      SET #{ActiveRecord::Base.connection.quote_column_name('type')} = 'Agents::EmailDigestAgent'
+      WHERE #{ActiveRecord::Base.connection.quote_column_name('type')} = 'Agents::DigestEmailAgent'
+    SQL
+
+    execute sql
+  end
+
+  def down
+    sql = <<-SQL
+      UPDATE #{ActiveRecord::Base.connection.quote_table_name('agents')}
+      SET #{ActiveRecord::Base.connection.quote_column_name('type')} = 'Agents::DigestEmailAgent'
+      WHERE #{ActiveRecord::Base.connection.quote_column_name('type')} = 'Agents::EmailDigestAgent'
+    SQL
+
+    execute sql
+  end
+end

+ 2 - 2
db/seeds.rb

@@ -69,7 +69,7 @@ unless user.agents.where(:name => "Rain Notifier").exists?
 end
 
 unless user.agents.where(:name => "Morning Digest").exists?
-  Agent.build_for_type("Agents::DigestEmailAgent", user,
+  Agent.build_for_type("Agents::EmailDigestAgent", user,
                        :name => "Morning Digest",
                        :schedule => "6am",
                        :options => { 'subject' => "Your Morning Digest", 'expected_receive_period_in_days' => "30" },
@@ -77,7 +77,7 @@ unless user.agents.where(:name => "Morning Digest").exists?
 end
 
 unless user.agents.where(:name => "Afternoon Digest").exists?
-  Agent.build_for_type("Agents::DigestEmailAgent", user,
+  Agent.build_for_type("Agents::EmailDigestAgent", user,
                        :name => "Afternoon Digest",
                        :schedule => "5pm",
                        :options => { 'subject' => "Your Afternoon Digest", 'expected_receive_period_in_days' => "7" },

+ 1 - 3
deployment/site-cookbooks/huginn_production/files/default/Procfile

@@ -1,4 +1,2 @@
 web: sudo bundle exec unicorn_rails -c config/unicorn.rb -E production
-schedule: sudo RAILS_ENV=production bundle exec rails runner bin/schedule.rb
-twitter: sudo RAILS_ENV=production bundle exec rails runner bin/twitter_stream.rb
-dj: sudo RAILS_ENV=production bundle exec script/delayed_job run
+jobs: sudo RAILS_ENV=production bundle exec rails runner bin/threaded.rb

+ 87 - 0
lib/huginn_scheduler.rb

@@ -0,0 +1,87 @@
+require 'rufus/scheduler'
+
+class HuginnScheduler
+  attr_accessor :mutex
+
+  def initialize
+    @rufus_scheduler = Rufus::Scheduler.new
+  end
+
+  def stop
+    @rufus_scheduler.stop
+  end
+
+  def run_schedule(time)
+    with_mutex do
+      puts "Queuing schedule for #{time}"
+      Agent.delay.run_schedule(time)
+    end
+  end
+
+  def propagate!
+    with_mutex do
+      puts "Queuing event propagation"
+      Agent.delay.receive!
+    end
+  end
+
+  def cleanup_expired_events!
+    with_mutex do
+      puts "Running event cleanup"
+      Event.delay.cleanup_expired!
+    end
+  end
+
+  def with_mutex
+    ActiveRecord::Base.connection_pool.with_connection do
+      mutex.synchronize do
+        yield
+      end
+    end
+  end
+
+  def run!
+    self.mutex = Mutex.new
+
+    tzinfo_friendly_timezone = ActiveSupport::TimeZone::MAPPING[ENV['TIMEZONE'].present? ? ENV['TIMEZONE'] : "Pacific Time (US & Canada)"]
+
+    # Schedule event propagation.
+
+    @rufus_scheduler.every '1m' do
+      propagate!
+    end
+
+    # Schedule event cleanup.
+
+    @rufus_scheduler.cron "0 0 * * * " + tzinfo_friendly_timezone do
+      cleanup_expired_events!
+    end
+
+    # Schedule repeating events.
+
+    %w[1m 2m 5m 10m 30m 1h 2h 5h 12h 1d 2d 7d].each do |schedule|
+      @rufus_scheduler.every schedule do
+        run_schedule "every_#{schedule}"
+      end
+    end
+
+    # Schedule events for specific times.
+
+    # Times are assumed to be in PST for now.  Can store a user#timezone later.
+    24.times do |hour|
+      @rufus_scheduler.cron "0 #{hour} * * * " + tzinfo_friendly_timezone do
+        if hour == 0
+          run_schedule "midnight"
+        elsif hour < 12
+          run_schedule "#{hour}am"
+        elsif hour == 12
+          run_schedule "noon"
+        else
+          run_schedule "#{hour - 12}pm"
+        end
+      end
+    end
+
+    @rufus_scheduler.join
+  end
+end

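To see the rufus-scheduler pattern this class wraps in isolation (useful when debugging schedule timing), a minimal standalone sketch looks like the following; the interval, cron expression and job bodies are illustrative, the real ones come from run! above:

require 'rufus/scheduler'

scheduler = Rufus::Scheduler.new

# Repeating job, analogous to the every-'1m' propagate! call.
scheduler.every '1m' do
  puts "tick #{Time.now}"
end

# Cron job with a timezone appended, the same form run! builds from
# ActiveSupport::TimeZone::MAPPING.
scheduler.cron '0 0 * * * America/Los_Angeles' do
  puts 'midnight job'
end

scheduler.join # block the calling thread, as HuginnScheduler#run! does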
+ 125 - 0
lib/twitter_stream.rb

@@ -0,0 +1,125 @@
+require 'cgi'
+require 'json'
+require 'twitter/json_stream'
+require 'em-http-request'
+require 'pp'
+
+class TwitterStream
+  def initialize
+    @running = true
+  end
+
+  def stop
+    @running = false
+    EventMachine::stop_event_loop if EventMachine.reactor_running?
+  end
+
+  def stream!(filters, agent, &block)
+    stream = Twitter::JSONStream.connect(
+      :path    => "/1/statuses/#{(filters && filters.length > 0) ? 'filter' : 'sample'}.json#{"?track=#{filters.map {|f| CGI::escape(f) }.join(",")}" if filters && filters.length > 0}",
+      :ssl     => true,
+      :oauth   => {
+        :consumer_key    => agent.twitter_consumer_key,
+        :consumer_secret => agent.twitter_consumer_secret,
+        :access_key      => agent.twitter_oauth_token,
+        :access_secret   => agent.twitter_oauth_token_secret
+      }
+    )
+
+    stream.each_item do |status|
+      status = JSON.parse(status) if status.is_a?(String)
+      next unless status
+      next if status.has_key?('delete')
+      next unless status['text']
+      status['text'] = status['text'].gsub(/&lt;/, "<").gsub(/&gt;/, ">").gsub(/[\t\n\r]/, '  ')
+      block.call(status)
+    end
+
+    stream.on_error do |message|
+      STDERR.puts " --> Twitter error: #{message} <--"
+    end
+
+    stream.on_no_data do |message|
+      STDERR.puts " --> Got no data for awhile; trying to reconnect."
+      EventMachine::stop_event_loop
+    end
+
+    stream.on_max_reconnects do |timeout, retries|
+      STDERR.puts " --> Oops, tried too many times! <--"
+      EventMachine::stop_event_loop
+    end
+  end
+
+  def load_and_run(agents)
+    agents.group_by { |agent| agent.twitter_oauth_token }.each do |oauth_token, agents|
+      filter_to_agent_map = agents.map { |agent| agent.options[:filters] }.flatten.uniq.compact.map(&:strip).inject({}) { |m, f| m[f] = []; m }
+
+      agents.each do |agent|
+        agent.options[:filters].flatten.uniq.compact.map(&:strip).each do |filter|
+          filter_to_agent_map[filter] << agent
+        end
+      end
+
+      recent_tweets = []
+
+      stream!(filter_to_agent_map.keys, agents.first) do |status|
+        if status["retweeted_status"].present? && status["retweeted_status"].is_a?(Hash)
+          puts "Skipping retweet: #{status["text"]}"
+        elsif recent_tweets.include?(status["id_str"])
+          puts "Skipping duplicate tweet: #{status["text"]}"
+        else
+          recent_tweets << status["id_str"]
+          recent_tweets.shift if recent_tweets.length > DUPLICATE_DETECTION_LENGTH
+          puts status["text"]
+          filter_to_agent_map.keys.each do |filter|
+            if (filter.downcase.split(SEPARATOR) - status["text"].downcase.split(SEPARATOR)).reject(&:empty?) == [] # Hacky McHackerson
+              filter_to_agent_map[filter].each do |agent|
+                puts " -> #{agent.name}"
+                agent.process_tweet(filter, status)
+              end
+            end
+          end
+        end
+      end
+    end
+  end
+
+  RELOAD_TIMEOUT = 10.minutes
+  DUPLICATE_DETECTION_LENGTH = 1000
+  SEPARATOR = /[^\w_\-]+/
+
+  def run
+    while @running
+      begin
+        agents = Agents::TwitterStreamAgent.all
+
+        EventMachine::run do
+          EventMachine.add_periodic_timer(RELOAD_TIMEOUT) {
+            puts "Reloading EventMachine and all Agents..."
+            EventMachine::stop_event_loop
+          }
+
+          if agents.length == 0
+            puts "No agents found.  Will look again in a minute."
+            sleep 60
+            EventMachine::stop_event_loop
+          else
+            puts "Found #{agents.length} agent(s).  Loading them now..."
+            load_and_run agents
+          end
+        end
+
+        print "Pausing..."; STDOUT.flush
+        sleep 1
+        puts "done."
+      rescue SignalException, SystemExit
+        @running = false
+        EventMachine::stop_event_loop if EventMachine.reactor_running?
+      rescue StandardError => e
+        STDERR.puts "\nException #{e.message}:\n#{e.backtrace.join("\n")}\n\n"
+        STDERR.puts "Waiting for a couple of minutes..."
+        sleep 120
+      end
+    end
+  end
+end

+ 7 - 7
spec/models/agents/digest_email_agent_spec.rb

@@ -1,12 +1,12 @@
 require 'spec_helper'
 
-describe Agents::DigestEmailAgent do
+describe Agents::EmailDigestAgent do
   def get_message_part(mail, content_type)
     mail.body.parts.find { |p| p.content_type.match content_type }.body.raw_source
   end
 
   before do
-    @checker = Agents::DigestEmailAgent.new(:name => "something", :options => { :expected_receive_period_in_days => 2, :subject => "something interesting" })
+    @checker = Agents::EmailDigestAgent.new(:name => "something", :options => { :expected_receive_period_in_days => 2, :subject => "something interesting" })
     @checker.user = users(:bob)
     @checker.save!
   end
@@ -27,14 +27,14 @@ describe Agents::DigestEmailAgent do
      event2.payload = { :data => "Something else you should know about" }
      event2.save!
 
-      Agents::DigestEmailAgent.async_receive(@checker.id, [event1.id, event2.id])
+      Agents::EmailDigestAgent.async_receive(@checker.id, [event1.id, event2.id])
       @checker.reload.memory[:queue].should == [{ 'data' => "Something you should know about" }, { 'data' => "Something else you should know about" }]
     end
   end
 
   describe "#check" do
     it "should send an email" do
-      Agents::DigestEmailAgent.async_check(@checker.id)
+      Agents::EmailDigestAgent.async_check(@checker.id)
       ActionMailer::Base.deliveries.should == []
 
       @checker.memory[:queue] = [{ :data => "Something you should know about" },
@@ -44,7 +44,7 @@ describe Agents::DigestEmailAgent do
      @checker.memory[:events] = [1,2,3,4]
      @checker.save!
 
-      Agents::DigestEmailAgent.async_check(@checker.id)
+      Agents::EmailDigestAgent.async_check(@checker.id)
      ActionMailer::Base.deliveries.last.to.should == ["bob@example.com"]
      ActionMailer::Base.deliveries.last.subject.should == "something interesting"
      get_message_part(ActionMailer::Base.deliveries.last, /plain/).strip.should == "Event\n  data: Something you should know about\n\nFoo\n  bar: 2\n  url: http://google.com\n\nhi\n  woah: there\n\nEvent\n  test: 2"
@@ -61,7 +61,7 @@ describe Agents::DigestEmailAgent do
      Agent.receive!
      @checker.reload.memory[:queue].should_not be_empty
 
-      Agents::DigestEmailAgent.async_check(@checker.id)
+      Agents::EmailDigestAgent.async_check(@checker.id)
 
      plain_email_text = get_message_part(ActionMailer::Base.deliveries.last, /plain/).strip
      html_email_text = get_message_part(ActionMailer::Base.deliveries.last, /html/).strip
@@ -72,4 +72,4 @@ describe Agents::DigestEmailAgent do
      @checker.reload.memory[:queue].should be_empty
    end
  end
-end
+end

+ 52 - 0
spec/models/agents/mqtt_agent_spec.rb

@@ -0,0 +1,52 @@
+require 'spec_helper'
+require 'mqtt'
+require './spec/support/fake_mqtt_server'
+
+describe Agents::MqttAgent do
+
+  before :each do
+    @error_log = StringIO.new
+
+    @server = MQTT::FakeServer.new(41234, '127.0.0.1')
+    @server.just_one = true
+    @server.logger = Logger.new(@error_log)
+    @server.logger.level = Logger::DEBUG
+    @server.start
+
+    @valid_params = {
+      'uri' => "mqtt://#{@server.address}:#{@server.port}",
+      'topic' => '/#',
+      'max_read_time' => '1',
+      'expected_update_period_in_days' => "2"
+    }
+
+    @checker = Agents::MqttAgent.new(
+      :name => "somename",
+      :options => @valid_params,
+      :schedule => "midnight"
+    )
+    @checker.user = users(:jane)
+    @checker.save!
+  end
+
+  after :each do
+    @server.stop
+  end
+
+  describe "#check" do
+    it "should check that initial run creates an event" do
+      expect { @checker.check }.to change { Event.count }.by(2)
+    end
+  end
+
+  describe "#working?" do
+    it "checks if it's generating events as scheduled" do
+      @checker.should_not be_working
+      @checker.check
+      @checker.reload.should be_working
+      three_days_from_now = 3.days.from_now
+      stub(Time).now { three_days_from_now }
+      @checker.should_not be_working
+    end
+  end
+end

+ 137 - 0
spec/support/fake_mqtt_server.rb

@@ -0,0 +1,137 @@
+#!/usr/bin/env ruby
+#
+# This is a 'fake' MQTT server to help with testing client implementations
+#
+# See https://github.com/njh/ruby-mqtt/blob/master/spec/fake_server.rb
+#
+# It behaves in the following ways:
+#   * Responds to CONNECT with a successful CONNACK
+#   * Responds to PUBLISH by echoing the packet back
+#   * Responds to SUBSCRIBE with SUBACK and a PUBLISH to the topic
+#   * Responds to PINGREQ with PINGRESP
+#   * Responds to DISCONNECT by closing the socket
+#
+# It has the following restrictions:
+#   * Doesn't deal with timeouts
+#   * Only handles a single connection at a time
+#
+
+$:.unshift File.dirname(__FILE__)+'/../lib'
+
+require 'logger'
+require 'socket'
+require 'mqtt'
+
+
+class MQTT::FakeServer
+  attr_reader :address, :port
+  attr_reader :last_publish
+  attr_reader :thread
+  attr_reader :pings_received
+  attr_accessor :just_one
+  attr_accessor :logger
+
+  # Create a new fake MQTT server
+  #
+  # If no port is given, bind to a random port number
+  # If no bind address is given, bind to localhost
+  def initialize(port=nil, bind_address='127.0.0.1')
+    @port = port
+    @address = bind_address
+  end
+
+  # Get the logger used by the server
+  def logger
+    @logger ||= Logger.new(STDOUT)
+  end
+
+  # Start the thread and open the socket that will process client connections
+  def start
+    @socket ||= TCPServer.new(@address, @port)
+    @address = @socket.addr[3]
+    @port = @socket.addr[1]
+    @thread ||= Thread.new do
+      logger.info "Started a fake MQTT server on #{@address}:#{@port}"
+      loop do
+        # Wait for a client to connect
+        client = @socket.accept
+        @pings_received = 0
+        handle_client(client)
+        break if just_one
+      end
+    end
+  end
+
+  # Stop the thread and close the socket
+  def stop
+    logger.info "Stopping fake MQTT server"
+    @socket.close unless @socket.nil?
+    @socket = nil
+
+    @thread.kill if @thread and @thread.alive?
+    @thread = nil
+  end
+
+  # Start the server thread and wait for it to finish (possibly never)
+  def run
+    start
+    begin
+      @thread.join
+    rescue Interrupt
+      stop
+    end
+  end
+
+  protected
+
+  # Given a client socket, process MQTT packets from the client
+  def handle_client(client)
+    loop do
+      packet = MQTT::Packet.read(client)
+      logger.debug packet.inspect
+
+      case packet
+        when MQTT::Packet::Connect
+          client.write MQTT::Packet::Connack.new(:return_code => 0)
+        when MQTT::Packet::Publish
+          client.write packet
+          @last_publish = packet
+        when MQTT::Packet::Subscribe
+          client.write MQTT::Packet::Suback.new(
+            :message_id => packet.message_id,
+            :granted_qos => 0
+          )
+          topic = packet.topics[0][0]
+          client.write MQTT::Packet::Publish.new(
+            :topic => topic,
+            :payload => "hello #{topic}",
+            :retain => true
+          )
+          client.write MQTT::Packet::Publish.new(
+            :topic => topic,
+            :payload => "did you know about #{topic}",
+            :retain => true
+          )
+        when MQTT::Packet::Pingreq
+          client.write MQTT::Packet::Pingresp.new
+          @pings_received += 1
+        when MQTT::Packet::Disconnect
+          client.close
+          break
+      end
+    end
+  rescue MQTT::ProtocolException => e
+    logger.warn "Protocol error, closing connection: #{e}"
+    client.close
+  end
+end
+
+if __FILE__ == $0
+  server = MQTT::FakeServer.new(MQTT::DEFAULT_PORT)
+  server.logger.level = Logger::DEBUG
+  server.run
+end
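
To poke at the fake server by hand outside the specs, a sketch along these lines should work; it mirrors the setup in spec/models/agents/mqtt_agent_spec.rb, and the port, topic and require path are assumptions for the example:

require 'mqtt'
require_relative 'fake_mqtt_server' # this file, assuming spec/support is the working directory

server = MQTT::FakeServer.new(41234, '127.0.0.1')
server.just_one = true # handle a single client, then stop accepting, as the specs do
server.start

MQTT::Client.connect("mqtt://#{server.address}:#{server.port}") do |client|
  # A SUBSCRIBE is answered with two retained PUBLISH packets, so get
  # yields "hello ..." and "did you know about ..." for the chosen topic.
  client.get('test/topic') do |topic, message|
    puts "#{topic}: #{message}"
    break
  end
end

server.stop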